Implement a `cargo fetch` command
author	Alex Crichton <alex@alexcrichton.com>
Thu, 11 Sep 2014 18:50:57 +0000 (11:50 -0700)
committer	Alex Crichton <alex@alexcrichton.com>
Tue, 16 Sep 2014 22:59:39 +0000 (15:59 -0700)
This command is used to download all dependencies of a package ahead of time to
ensure that no more network communication will be necessary as part of a build.

cc #358

src/bin/cargo.rs
src/bin/fetch.rs [new file with mode: 0644]
src/cargo/ops/cargo_compile.rs
src/cargo/ops/cargo_fetch.rs [new file with mode: 0644]
src/cargo/ops/cargo_generate_lockfile.rs
src/cargo/ops/mod.rs
tests/test_cargo_compile_git_deps.rs
tests/test_cargo_fetch.rs [new file with mode: 0644]
tests/tests.rs

index 4fe2f3e0176b5a378f896dc39f8ba1b1b6468769..017abb33a2543e47a9d68f0a23ce6100d381cd35 100644 (file)
@@ -55,6 +55,7 @@ macro_rules! each_subcommand( ($macro:ident) => ({
     $macro!(config_for_key)
     $macro!(config_list)
     $macro!(doc)
+    $macro!(fetch)
     $macro!(generate_lockfile)
     $macro!(git_checkout)
     $macro!(locate_project)
diff --git a/src/bin/fetch.rs b/src/bin/fetch.rs
new file mode 100644 (file)
index 0000000..0c02538
--- /dev/null
@@ -0,0 +1,38 @@
+use docopt;
+
+use cargo::ops;
+use cargo::core::{MultiShell};
+use cargo::util::{CliResult, CliError};
+use cargo::util::important_paths::find_root_manifest_for_cwd;
+
+docopt!(Options, "
+Fetch dependencies of a package from the network.
+
+Usage:
+    cargo fetch [options]
+
+Options:
+    -h, --help              Print this message
+    --manifest-path PATH    Path to the manifest to fetch dependencies for
+    -v, --verbose           Use verbose output
+
+If a lockfile is available, this command will ensure that all of the git
+dependencies and/or registries dependencies are downloaded and locally
+available. The network is never touched after a `cargo fetch` unless
+the lockfile changes.
+
+If the lockfile is not available, then this is the equivalent of
+`cargo generate-lockfile`. A lockfile is generated and dependencies are also
+all updated.
+",  flag_manifest_path: Option<String>)
+
+pub fn execute(options: Options, shell: &mut MultiShell) -> CliResult<Option<()>> {
+    shell.set_verbose(options.flag_verbose);
+    let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
+    try!(ops::fetch(&root, shell).map_err(|e| {
+        CliError::from_boxed(e, 101)
+    }));
+    Ok(None)
+}
+
+
index 0c437b777353445d01a40ee770a59c48fd5ee0db..f9cce0b71691f9a31b97f2ec38b691384ab3ae45 100644 (file)
 //!
 
 use std::os;
-use std::collections::{HashMap, HashSet};
+use std::collections::HashMap;
 
 use core::registry::PackageRegistry;
 use core::{MultiShell, Source, SourceId, PackageSet, Target, PackageId};
-use core::{Package, Summary, Resolve, resolver};
+use core::resolver;
 use ops;
 use sources::{PathSource};
 use util::config::{Config, ConfigValue};
@@ -72,26 +72,21 @@ pub fn compile(manifest_path: &Path,
                                                    manifest_path.dir_path()));
 
     let (packages, resolve_with_overrides, sources) = {
-        let _p = profile::start("resolving...");
-        let lockfile = manifest_path.dir_path().join("Cargo.lock");
-        let source_id = package.get_package_id().get_source_id();
-
         let mut config = try!(Config::new(*shell, jobs, target.clone()));
         let mut registry = PackageRegistry::new(&mut config);
-        let dependencies = package.get_dependencies().iter().filter(|dep| {
-            dep.is_transitive() || dev_deps
-        }).map(|d| d.clone()).collect::<Vec<_>>();
 
-        match try!(ops::load_lockfile(&lockfile, source_id)) {
-            Some(r) => try!(add_lockfile_sources(&mut registry, &package, &r)),
-            None => try!(registry.add_sources(package.get_source_ids())),
-        }
+        // First, resolve the package's *listed* dependencies, as well as
+        // downloading and updating all remotes and such.
+        try!(ops::resolve_and_fetch(&mut registry, &package));
 
-        let resolved = try!(resolver::resolve(package.get_package_id(),
-                                              package.get_dependencies(),
-                                              &mut registry));
-        try!(ops::write_resolve(&package, &resolved));
+        // Second, resolve with precisely what we're doing. Filter out
+        // transitive dependencies if necessary, specify features, handle
+        // overrides, etc.
+        let _p = profile::start("resolving w/ overrides...");
 
+        let dependencies = package.get_dependencies().iter().filter(|dep| {
+            dep.is_transitive() || dev_deps
+        }).map(|d| d.clone()).collect::<Vec<_>>();
         try!(registry.add_overrides(override_ids));
         let resolved_with_overrides =
                 try!(resolver::resolve(package.get_package_id(),
@@ -196,63 +191,3 @@ fn scrape_target_config(config: &mut Config,
 
     Ok(())
 }
-
-/// When a lockfile is present, we want to keep as many dependencies at their
-/// original revision as possible. We need to account, however, for
-/// modifications to the manifest in terms of modifying, adding, or deleting
-/// dependencies.
-///
-/// This method will add any appropriate sources from the lockfile into the
-/// registry, and add all other sources from the root package to the registry.
-/// Any dependency which has not been modified has its source added to the
-/// registry (to retain the precise field if possible). Any dependency which
-/// *has* changed has its source id listed in the manifest added and all of its
-/// transitive dependencies are blacklisted to not be added from the lockfile.
-///
-/// TODO: this won't work too well for registry-based packages, but we don't
-///       have many of those anyway so we should be ok for now.
-fn add_lockfile_sources(registry: &mut PackageRegistry,
-                        root: &Package,
-                        resolve: &Resolve) -> CargoResult<()> {
-    let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
-        deps.map(|d| (d.get_name(), d))
-    }).collect::<HashMap<_, _>>();
-
-    let mut sources = vec![root.get_package_id().get_source_id().clone()];
-    let mut to_avoid = HashSet::new();
-    let mut to_add = HashSet::new();
-    for dep in root.get_dependencies().iter() {
-        match deps.find(&dep.get_name()) {
-            Some(&lockfile_dep) => {
-                let summary = Summary::new(lockfile_dep, []);
-                if dep.matches(&summary) {
-                    fill_with_deps(resolve, lockfile_dep, &mut to_add);
-                } else {
-                    fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
-                    sources.push(dep.get_source_id().clone());
-                }
-            }
-            None => sources.push(dep.get_source_id().clone()),
-        }
-    }
-
-    // Only afterward once we know the entire blacklist are the lockfile
-    // sources added.
-    for addition in to_add.iter() {
-        if !to_avoid.contains(addition) {
-            sources.push(addition.get_source_id().clone());
-        }
-    }
-
-    return registry.add_sources(sources);
-
-    fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
-                          set: &mut HashSet<&'a PackageId>) {
-        if !set.insert(dep) { return }
-        for mut deps in resolve.deps(dep).move_iter() {
-            for dep in deps {
-                fill_with_deps(resolve, dep, set);
-            }
-        }
-    }
-}
diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs
new file mode 100644 (file)
index 0000000..3cdb92f
--- /dev/null
@@ -0,0 +1,100 @@
+use std::collections::{HashSet, HashMap};
+
+use core::{MultiShell, Package, PackageId, Summary};
+use core::registry::PackageRegistry;
+use core::resolver::{mod, Resolve};
+use core::source::Source;
+use ops;
+use sources::PathSource;
+use util::{CargoResult, Config};
+use util::profile;
+
+pub fn fetch(manifest_path: &Path,
+             shell: &mut MultiShell) -> CargoResult<()> {
+    let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
+    try!(source.update());
+    let package = try!(source.get_root_package());
+
+    let mut config = try!(Config::new(shell, None, None));
+    let mut registry = PackageRegistry::new(&mut config);
+    try!(resolve_and_fetch(&mut registry, &package));
+    Ok(())
+}
+
+pub fn resolve_and_fetch(registry: &mut PackageRegistry, package: &Package)
+                         -> CargoResult<Resolve> {
+    let _p = profile::start("resolve and fetch...");
+
+    let lockfile = package.get_manifest_path().dir_path().join("Cargo.lock");
+    let source_id = package.get_package_id().get_source_id();
+    match try!(ops::load_lockfile(&lockfile, source_id)) {
+        Some(r) => try!(add_lockfile_sources(registry, package, &r)),
+        None => try!(registry.add_sources(package.get_source_ids())),
+    }
+
+    let resolved = try!(resolver::resolve(package.get_package_id(),
+                                          package.get_dependencies(),
+                                          registry));
+    try!(ops::write_resolve(package, &resolved));
+    Ok(resolved)
+}
+
+/// When a lockfile is present, we want to keep as many dependencies at their
+/// original revision as possible. We need to account, however, for
+/// modifications to the manifest in terms of modifying, adding, or deleting
+/// dependencies.
+///
+/// This method will add any appropriate sources from the lockfile into the
+/// registry, and add all other sources from the root package to the registry.
+/// Any dependency which has not been modified has its source added to the
+/// registry (to retain the precise field if possible). Any dependency which
+/// *has* changed has its source id listed in the manifest added and all of its
+/// transitive dependencies are blacklisted to not be added from the lockfile.
+///
+/// TODO: this won't work too well for registry-based packages, but we don't
+///       have many of those anyway so we should be ok for now.
+fn add_lockfile_sources(registry: &mut PackageRegistry,
+                        root: &Package,
+                        resolve: &Resolve) -> CargoResult<()> {
+    let deps = resolve.deps(root.get_package_id()).move_iter().flat_map(|deps| {
+        deps.map(|d| (d.get_name(), d))
+    }).collect::<HashMap<_, _>>();
+
+    let mut sources = vec![root.get_package_id().get_source_id().clone()];
+    let mut to_avoid = HashSet::new();
+    let mut to_add = HashSet::new();
+    for dep in root.get_dependencies().iter() {
+        match deps.find(&dep.get_name()) {
+            Some(&lockfile_dep) => {
+                let summary = Summary::new(lockfile_dep, []);
+                if dep.matches(&summary) {
+                    fill_with_deps(resolve, lockfile_dep, &mut to_add);
+                } else {
+                    fill_with_deps(resolve, lockfile_dep, &mut to_avoid);
+                    sources.push(dep.get_source_id().clone());
+                }
+            }
+            None => sources.push(dep.get_source_id().clone()),
+        }
+    }
+
+    // Only afterward once we know the entire blacklist are the lockfile
+    // sources added.
+    for addition in to_add.iter() {
+        if !to_avoid.contains(addition) {
+            sources.push(addition.get_source_id().clone());
+        }
+    }
+
+    return registry.add_sources(sources);
+
+    fn fill_with_deps<'a>(resolve: &'a Resolve, dep: &'a PackageId,
+                          set: &mut HashSet<&'a PackageId>) {
+        if !set.insert(dep) { return }
+        for mut deps in resolve.deps(dep).move_iter() {
+            for dep in deps {
+                fill_with_deps(resolve, dep, set);
+            }
+        }
+    }
+}
index 4875034d1b1076e8b6cb7987f6533e183347ac9a..3ec6560b78cab297e50703afc257dd806f816265 100644 (file)
@@ -16,28 +16,19 @@ use util::toml as cargo_toml;
 pub fn generate_lockfile(manifest_path: &Path,
                          shell: &mut MultiShell)
                          -> CargoResult<()> {
-
-    log!(4, "compile; manifest-path={}", manifest_path.display());
-
     let mut source = try!(PathSource::for_path(&manifest_path.dir_path()));
     try!(source.update());
-
-    // TODO: Move this into PathSource
     let package = try!(source.get_root_package());
-    debug!("loaded package; package={}", package);
-
     let source_ids = package.get_source_ids();
+    let mut config = try!(Config::new(shell, None, None));
 
     let resolve = {
-        let mut config = try!(Config::new(shell, None, None));
-
         let mut registry = PackageRegistry::new(&mut config);
         try!(registry.add_sources(source_ids));
         try!(resolver::resolve(package.get_package_id(),
                                package.get_dependencies(),
                                &mut registry))
     };
-
     try!(write_resolve(&package, &resolve));
     Ok(())
 }
index ba772460fa4661a7638413888b3547854a7d195f..7ea99eb1dd07aafb61cdb1ee4d68e78a285fe656 100644 (file)
@@ -11,6 +11,7 @@ pub use self::cargo_test::{run_tests, run_benches, TestOptions};
 pub use self::cargo_package::package;
 pub use self::cargo_upload::{upload, upload_configuration, UploadConfig};
 pub use self::cargo_upload::{upload_login, http_proxy, http_handle};
+pub use self::cargo_fetch::{fetch, resolve_and_fetch};
 
 mod cargo_clean;
 mod cargo_compile;
@@ -23,3 +24,4 @@ mod cargo_generate_lockfile;
 mod cargo_test;
 mod cargo_package;
 mod cargo_upload;
+mod cargo_fetch;
index c15016786038dfab9a5446031b5f438bef127497..9c4bf20c63f54599414588ae897731fa1dd7b372 100644 (file)
@@ -1244,3 +1244,32 @@ test!(git_dep_build_cmd {
       execs().with_stdout("1\n"));
 })
 
+test!(fetch_downloads {
+    let bar = git_repo("bar", |project| {
+        project.file("Cargo.toml", r#"
+            [package]
+            name = "bar"
+            version = "0.5.0"
+            authors = ["wycats@example.com"]
+        "#)
+        .file("src/lib.rs", "pub fn bar() -> int { 1 }")
+    }).assert();
+
+    let p = project("p1")
+        .file("Cargo.toml", format!(r#"
+            [project]
+            name = "p1"
+            version = "0.5.0"
+            authors = []
+            [dependencies.bar]
+            git = '{}'
+        "#, bar.url()).as_slice())
+        .file("src/main.rs", "fn main() {}");
+    assert_that(p.cargo_process("fetch"),
+                execs().with_status(0).with_stdout(format!("\
+{updating} git repository `{url}`
+", updating = UPDATING, url = bar.url())));
+
+    assert_that(p.process(cargo_dir().join("cargo")).arg("fetch"),
+                execs().with_status(0).with_stdout(""));
+})
diff --git a/tests/test_cargo_fetch.rs b/tests/test_cargo_fetch.rs
new file mode 100644 (file)
index 0000000..7ebd1cd
--- /dev/null
@@ -0,0 +1,21 @@
+use support::{project, execs};
+use hamcrest::assert_that;
+
+fn setup() {}
+
+test!(no_deps {
+    let p = project("foo")
+        .file("Cargo.toml", r#"
+            [package]
+            name = "foo"
+            authors = []
+            version = "0.0.1"
+        "#)
+        .file("src/main.rs", r#"
+            mod a; fn main() {}
+        "#)
+        .file("src/a.rs", "");
+
+    assert_that(p.cargo_process("fetch"),
+                execs().with_status(0).with_stdout(""));
+})
index 7ece4ff5f20ba34f7382c34cfcbfb4f117672ef7..1c7e95c40a8fce818023d3a351185d8b99e81756 100644 (file)
@@ -46,3 +46,4 @@ mod test_cargo_package;
 mod test_cargo_build_auth;
 mod test_cargo_registry;
 mod test_cargo_upload;
+mod test_cargo_fetch;